FullConnection
Takes an input matrix and a weight matrix, performs the fully connected computation (a matrix multiplication), optionally adds a bias term and applies an activation function, and writes the result matrix.
\[
\begin{aligned}
dst_{i,j} &= \sum_{k=0}^{K-1} A_{i,k} \cdot B_{k,j} + bias_{i,j} \\
dst_{i,j} &= \mathrm{activation}(dst_{i,j})
\end{aligned}
\]
The supported activation functions are ReLU and ReLU6 (or none).
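For reference, these are the standard definitions:

\[
\mathrm{ReLU}(x) = \max(0, x), \qquad \mathrm{ReLU6}(x) = \min(\max(0, x), 6)
\]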
- Inputs:
  A - address of the input matrix, shape M × K.
  B - address of the weight matrix, shape K × N.
  bias - address of the bias matrix, shape M × N; may be NULL.
  M - number of rows of the output matrix.
  N - number of columns of the output matrix.
  K - inner (reduction) dimension size.
  activation_type - activation function type.
  core_mask - core mask (shared-memory versions only).
- Output:
  C - address of the output matrix, shape M × N.
- Supported platforms:
  FT78NE, MT7004
Notes
- FT78NE supports fp and int8.
- MT7004 supports hp and fp.
- activation_type supports ACTIVATION_NONE, ACTIVATION_RELU, and ACTIVATION_RELU6.
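As a minimal sketch of the computation defined above (not the library's actual implementation), a plain-C scalar reference could look like the following. The function name fc_reference, the row-major memory layout, and the numeric values of the ACTIVATION_* constants are assumptions made for illustration; the real constants come from fullconnection.h.

#include <stddef.h>

/* Assumed values; the real constants are defined in fullconnection.h. */
enum { ACTIVATION_NONE = 0, ACTIVATION_RELU = 1, ACTIVATION_RELU6 = 2 };

/* Scalar reference: C[i,j] = activation(sum_k A[i,k] * B[k,j] + bias[i,j]) */
static void fc_reference(const float *A, const float *B, float *C,
                         const float *bias, int M, int N, int K,
                         int activation_type) {
    for (int i = 0; i < M; i++) {
        for (int j = 0; j < N; j++) {
            float acc = 0.0f;
            for (int k = 0; k < K; k++)
                acc += A[i * K + k] * B[k * N + j];  /* A: M x K, B: K x N */
            if (bias != NULL)
                acc += bias[i * N + j];              /* bias is optional */
            if (activation_type != ACTIVATION_NONE && acc < 0.0f)
                acc = 0.0f;                          /* ReLU / ReLU6 lower bound */
            if (activation_type == ACTIVATION_RELU6 && acc > 6.0f)
                acc = 6.0f;                          /* ReLU6 upper bound */
            C[i * N + j] = acc;                      /* C: M x N */
        }
    }
}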
Shared-memory versions:
- void i8_fullconnection_s(int8_t *A, int8_t *B, int8_t *C, int8_t *bias, int M, int N, int K, int activation_type, int core_mask)
- void fp_fullconnection_s(float *A, float *B, float *C, float *bias, int M, int N, int K, int activation_type, int core_mask)
- void hp_fullconnection_s(half *A, half *B, half *C, half *bias, int M, int N, int K, int activation_type, int core_mask)
C usage example:
// FT78NE example (shared-memory version)
#include <stdio.h>
#include <fullconnection.h>

int main(int argc, char* argv[]) {
    // Buffers placed in shared memory
    float *A = (float *)0xA0000000;  // input matrix, M x K
    float *B = (float *)0xA0010000;  // weight matrix, K x N
    float *C = (float *)0xC0000000;  // output matrix, M x N
    float *bias = NULL;              // bias is optional; NULL skips it
    int M = 4, N = 8, K = 16;
    int activation_type = ACTIVATION_RELU;
    int core_mask = 0xff;            // core mask (shared-memory versions only)
    fp_fullconnection_s(A, B, C, bias, M, N, K, activation_type, core_mask);
    return 0;
}
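The meaning of the individual core_mask bits is not specified here; assuming one bit per compute core (so 0xff enables eight cores), restricting the call to the first four cores would look like:

    int core_mask = 0x0f;  /* assumption: bits 0-3 select cores 0-3 */
    fp_fullconnection_s(A, B, C, bias, M, N, K, activation_type, core_mask);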
Private-memory versions:
- void i8_fullconnection_p(int8_t *A, int8_t *B, int8_t *C, int8_t *bias, int M, int N, int K, int activation_type)
- void fp_fullconnection_p(float *A, float *B, float *C, float *bias, int M, int N, int K, int activation_type)
- void hp_fullconnection_p(half *A, half *B, half *C, half *bias, int M, int N, int K, int activation_type)
C usage example:
// FT78NE example (private-memory version)
#include <stdio.h>
#include <fullconnection.h>

int main(int argc, char* argv[]) {
    // Buffers placed in private memory
    float *A = (float *)0x10810000;  // input matrix, M x K
    float *B = (float *)0x10820000;  // weight matrix, K x N
    float *C = (float *)0x10830000;  // output matrix, M x N
    float *bias = NULL;              // bias is optional; NULL skips it
    int M = 4, N = 8, K = 16;
    int activation_type = ACTIVATION_RELU6;
    fp_fullconnection_p(A, B, C, bias, M, N, K, activation_type);
    return 0;
}
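To sanity-check either variant, the device output can be compared element-wise against the scalar fc_reference sketch given earlier; the helper name check_output and the 1e-4 tolerance below are illustrative choices, not part of the library:

#include <math.h>
#include <stdio.h>

/* Hypothetical helper: compare computed output with the reference result. */
static int check_output(const float *got, const float *want, int M, int N) {
    for (int i = 0; i < M * N; i++) {
        if (fabsf(got[i] - want[i]) > 1e-4f) {
            printf("mismatch at element %d: got %f, want %f\n",
                   i, got[i], want[i]);
            return 0;  /* mismatch found */
        }
    }
    return 1;  /* all elements within tolerance */
}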